home
***
CD-ROM
|
disk
|
FTP
|
other
***
search
/
Games of Daze
/
Infomagic - Games of Daze (Summer 1995) (Disc 1 of 2).iso
/
x2ftp
/
msdos
/
ai
/
netstuff
/
delta.c
< prev
next >
Wrap
C/C++ Source or Header
|
1991-02-26
|
20KB
|
631 lines
/**
This C language example is printed in Appendix A of Adaptive
Pattern Recognition and Neural Networks, by Yoh-Han Pao. The
algorithm and notation used come from chapter 5 of this book. The
program will run under VAX/VMS or PC-DOS/MS-DOS. The only changes
I made were to correct an obvious error in the routine called
"forward()" where the formal parameter lacked a type specification,
and to decrease some of the defined constants in order to allow
linking within the constraints of my memory model (see original
sizes in parentheses inside comments on each). I hope there are
no typing errors. I compiled and linked using Microsoft QuickC 2.0.
For a possible scenario in running this program, see the file named
"scenario" which uses the files par3.dat and data.dat. All are
included on this diskette.
Marilyn M. Nelson, February 1991 **/
/**
PROGRAM DESCRIPTION:
THIS PROGRAM ALLOWS A USER TO BUILD A GENERALIZED DELTA RULE
NET FOR SUPERVISED LEARNING. USER CAN SPECIFY THE NUMBER OF
INPUT & OUTPUT UNITS, NUMBER OF HIDDEN LAYERS AND NUMBER OF
UNITS IN EACH HIDDEN LAYER.
AFTER THE NET IS BUILT, LEARNING TAKES PLACE IN THE NET WITH
A GIVEN SET OF TRAINING SAMPLES. USER SPECIFIES VALUES OF
THE LEARNING RATE ETA, THE MOMENTUM RATE ALPHA, MAXIMUM
TOLERANCE ERRORS AND MAXIMUM NUMBER OF ITERATIONS.
AFTER LEARNING, ALL THE INFORMATION RELEVANT TO THE STRUCTURE
OF THE NET, INCLUDING WEIGHTS AND THRESHOLDS ARE STORED IN
FILES.
OUTPUTS CAN BE GENERATED FOR NEW PATTERNS BY READING FROM
FILE AND BY RECONSTRUCTING THE NET.
TRAINING SET SAMPLES AND ADDITIONAL SAMPLES FOR PROCESSING
ARE STORED IN FILES.
**/
#include <stdio.h>
#include <stdlib.h>  /* calloc(), exit() */
#include <string.h>  /* strcpy(), strcat() */
#include <math.h>
#include <ctype.h>
#ifndef VAX /* for declaration of calloc() on PC or compatible */
#include <malloc.h>
#endif
/* define constants used throughout functions */
#define NMXUNIT 40 /* max no. of units in a layer (50) */
#define NMXHLR 3 /* max no. of hidden layers ( 5) */
#define NMXOATTR 40 /* max no. of output features (50) */
#define NMXINP 100 /* max no. of input samples (200) */
#define NMXIATTR 40 /* max no. of input features (50) */
#define SEXIT 3 /* exit successfully */
#define RESTRT 2 /* restart */
#define FEXIT 1 /* exit in failure */
#define CONTNE 0 /* continue calculation */
/* Data base : declarations of variables */
float eta;      /** learning rate **/
float alpha;    /** momentum rate **/
float err_curr; /** normalized system error **/
float maxe;     /** max allowed system error **/
float maxep;    /** max allowed pattern error **/
float *wtptr[NMXHLR+1];   /* weight matrix per layer of links */
float *outptr[NMXHLR+2];  /* unit outputs per layer (+1 bias slot each) */
float *errptr[NMXHLR+2];  /* back-propagated error terms per layer */
float *delw[NMXHLR+1];    /* previous weight changes (momentum term) */
float target[NMXINP][NMXOATTR];            /* desired outputs per sample */
float input[NMXINP][NMXIATTR], ep[NMXINP]; /* inputs; per-pattern errors */
float outpt[NMXINP][NMXOATTR];             /* actual outputs per sample */
int nunit[NMXHLR+2], nhlayer, ninput, ninattr, noutattr;
int result, cnt, cnt_num;
int nsnew, nsold;
char task_name[20];
/* fopen() is declared by <stdio.h>; the old K&R-style redeclaration
   "FILE *fopen();" was removed as redundant. */
FILE *fp1, *fp2, *fp3;
int fplot10;  /* nonzero: log per-iteration error to "criter.dat" */
/* random number generator
   (computer independent) */
long randseed = 568731L;

/*
 * Linear congruential generator returning a value in [0, 32767].
 *
 * The original computed 15625L * randseed in signed long arithmetic,
 * which overflows -- undefined behavior in C -- and made the sequence
 * depend on whether long is 32 or 64 bits.  The update is now done in
 * unsigned arithmetic and the seed is reduced mod 2^32, which is
 * well-defined and reproduces the original 32-bit-long sequence.  The
 * returned 15 bits (bits 16..30 of the seed) depend only on the low
 * 31 bits, so the visible output stream is unchanged on 64-bit hosts.
 */
int random()
{
    unsigned long s = (unsigned long) randseed & 0xFFFFFFFFUL;

    s = (15625UL * s + 22221UL) & 0xFFFFFFFFUL;
    randseed = (long) s;
    return (int) ((s >> 16) & 0x7FFF);
}
/* allocate dynamic storage for the set */
/*
 * Computes the total number of weights (len1) and unit slots (len2)
 * for the whole net, allocates four flat arrays, and aims the per-layer
 * pointers (wtptr/outptr/errptr/delw) into them.  Each layer carries
 * one extra "virtual" unit whose output is fixed at 1.0; its weight
 * acts as the threshold.
 *
 * Fixes vs. original:
 *  - The original zeroed nunit[nhlayer+2], which writes one element
 *    past the nunit array when nhlayer == NMXHLR.  The sizing loops
 *    now stop one layer earlier (that last term was always zero) and
 *    the final layer's unit count is added to len2 separately.
 *  - calloc() results are checked before use.
 */
void init()
{
    int len1, len2, i;
    float *p1, *p2, *p3, *p4;

    len1 = len2 = 0;
    /* weights exist between consecutive layers 0..nhlayer+1 */
    for (i = 0; i < (nhlayer + 1); i++) {
        len1 += (nunit[i] + 1) * nunit[i+1];
        len2 += nunit[i] + 1;
    }
    len2 += nunit[nhlayer + 1] + 1;   /* output layer's unit slots */

    p1 = (float *) calloc(len1+1, sizeof(float));  /* weights */
    p2 = (float *) calloc(len2+1, sizeof(float));  /* outputs */
    p3 = (float *) calloc(len2+1, sizeof(float));  /* errors  */
    p4 = (float *) calloc(len1+1, sizeof(float));  /* delw    */
    if (p1 == NULL || p2 == NULL || p3 == NULL || p4 == NULL) {
        printf("\nOut of memory allocating the net\n");
        exit(0);
    }

    /* set up initial pointers */
    wtptr[0] = p1;
    outptr[0] = p2;
    errptr[0] = p3;
    delw[0] = p4;
    /* set up the rest of pointers */
    for (i = 1; i < (nhlayer + 1); i++) {
        wtptr[i] = wtptr[i-1] + nunit[i] * (nunit[i-1] + 1);
        delw[i] = delw[i-1] + nunit[i] * (nunit[i-1] + 1);
    }
    for (i = 1; i < (nhlayer + 2); i++) {
        outptr[i] = outptr[i-1] + nunit[i-1] + 1;
        errptr[i] = errptr[i-1] + nunit[i-1] + 1;
    }
    /* set up threshold outputs: the virtual unit always emits 1.0 */
    for (i = 0; i < nhlayer + 1; i++) {
        *(outptr[i] + nunit[i]) = 1.0;
    }
}
/* initialize weights with random
numbers between -0.5 and +0.5 */
void initwt()
{
int i, j;
for (j=0; j < nhlayer + 1; j++)
for (i=0; i < (nunit[j] + 1) * nunit[j + 1]; i++) {
*(wtptr[j] + i) = random() / pow(2.0,15.0) - 0.5;
*(delw[j] + i) = 0.0;
}
}
/* specify architecture of net and
values of learning parameters */
void set_up()
{
int i;
eta = 0.9;
printf("\nMomentum rate eta (default = 0.9)?: ");
scanf("%f", &eta);
alpha = 0.7;
printf("\nLearning rate alpha (default = 0.7)?: ");
scanf("%f", &alpha);
maxe = 0.01; maxep = 0.001;
printf("\nMax total error (default = 0.01)?: ");
scanf("%f", &maxe);
printf("\nMax individual error (default = 0.001)?: ");
scanf("%f", &maxep);
cnt_num = 1000;
printf("\nMax number of iterations (default = 1000)?: ");
scanf("%d", &cnt_num);
printf("\nNumber of hidden layers?: ");
scanf("%d", &nhlayer);
for (i=0; i < nhlayer; i++) {
printf("\n\tNumber of units for hidden layer %d?: ", i+1);
scanf("%d", &nunit[i+1]);
}
printf("\nCreate error file? (Enter 1 for yes, 0 for no) : ");
scanf("%d", &fplot10);
printf("\nExecution starts ");
printf(" -- if many iterations specified, go out for coffee...\n");
nunit[nhlayer+1] = noutattr;
nunit[0] = ninattr;
}
/* read file for net architecture and learning
parameters. File name has suffix _v.dat */
void dread(char *taskname)
{
int i,j,c;
char var_file_name[20];
strcpy(var_file_name, taskname);
strcat(var_file_name, "_v.dat");
if (( fp1 = fopen(var_file_name, "r")) == NULL)
{
perror("\n Cannot open data file ");
exit(0);
}
fscanf(fp1, "%d%d%d%f%f%d%d", &ninput, &noutattr, &ninattr,
&eta, &alpha, &nhlayer, &cnt_num);
for (i=0; i < nhlayer + 2; i++)
fscanf(fp1, "%d", &nunit[i]);
if ((c=fclose(fp1)) != 0)
printf("\nFile %s cannot be closed; error %d ",
var_file_name, c);
}
/* read file containing weights and thresholds
   and thresholds. File name has suffix _w.dat */
/*
 * Loads the learned weights/thresholds from <taskname>_w.dat into the
 * per-layer weight arrays; init() must have been called first so the
 * wtptr pointers are valid.
 *
 * Fixes vs. original:
 *  - wt_file_name was 20 bytes; a 19-char task name plus "_w.dat"
 *    needs 26, so the buffer was enlarged to 32.
 *  - Stray 'f' removed from the "%sf" format in the close-failure
 *    message.
 */
void wtread(char *taskname)
{
    int i, j, c;
    char wt_file_name[32];

    strcpy(wt_file_name, taskname);
    strcat(wt_file_name, "_w.dat");
    if ((fp2 = fopen(wt_file_name, "r")) == NULL)
    {
        perror("\n Cannot open data file ");
        exit(0);
    }
    for (i = 0; i < nhlayer + 1; i++) {
        for (j = 0; j < (nunit[i] + 1) * nunit[i + 1]; j++) {
            fscanf(fp2, "%f", (wtptr[i]+j));
        }
    }
    if ((c = fclose(fp2)) != 0)
        printf("\n File %s cannot be closed; error %d ",
               wt_file_name, c);
}
/* create file for net architecture and learning
   parameters. File name has suffix _v.dat */
/*
 * Saves the net description (sizes, learning parameters, iteration
 * count, final error, and the per-sample outputs) to <taskname>_v.dat
 * in the layout dread() expects.
 *
 * Fixes vs. original:
 *  - var_file_name was 20 bytes; a 19-char task name plus "_v.dat"
 *    needs 26, so the buffer was enlarged to 32.
 *  - %u conversions replaced with %d: all seven arguments are signed
 *    ints, and a specifier/argument type mismatch is undefined
 *    behavior.
 */
void dwrite(char *taskname)
{
    int i, j, c;
    char var_file_name[32];

    strcpy(var_file_name, taskname);
    strcat(var_file_name, "_v.dat");
    if ((fp1 = fopen(var_file_name, "w+")) == NULL)
    {
        perror(" Cannot open data file ");
        exit(0);
    }
    fprintf(fp1, "%d %d %d %f %f %d %d\n", ninput, noutattr,
            ninattr, eta, alpha, nhlayer, cnt_num);
    for (i = 0; i < nhlayer + 2; i++) {
        fprintf(fp1, "%d ", nunit[i]);
    }
    fprintf(fp1, "\n%d %f\n", cnt, err_curr);
    for (i = 0; i < ninput; i++)
    {
        for (j = 0; j < noutattr; j++)
            fprintf(fp1, "%f ", outpt[i][j]);
        fprintf(fp1, "\n");
    }
    if ((c = fclose(fp1)) != 0)
        printf("\nFile %s cannot be closed; error %d ",
               var_file_name, c);
}
/* create file for saving weights and thresholds
   learned from training. File name has suffix
   _w.dat */
/*
 * Writes every layer's weights (including threshold weights) to
 * <taskname>_w.dat, eight values per line, in the order wtread()
 * reads them back.
 *
 * Fix vs. original: wt_file_name was 20 bytes; a 19-char task name
 * plus "_w.dat" needs 26, so the buffer was enlarged to 32.
 */
void wtwrite(char *taskname)
{
    int i, j, c, k;
    char wt_file_name[32];

    strcpy(wt_file_name, taskname);
    strcat(wt_file_name, "_w.dat");
    if ((fp2 = fopen(wt_file_name, "w+")) == NULL)
    {
        perror("\nCannot open data file ");
        exit(0);
    }
    k = 0;                        /* values emitted on the current line */
    for (i = 0; i < nhlayer + 1; i++)
        for (j = 0; j < (nunit[i] + 1) * nunit[i + 1]; j++) {
            if (k == 8) {         /* wrap after eight values */
                k = 0;
                fprintf(fp2, "\n");
            }
            fprintf(fp2, "%f ", *(wtptr[i] + j));
            k++;
        }
    if ((c = fclose(fp2)) != 0)
        printf("\nFile %s cannot be closed; error %d ",
               wt_file_name, c);
}
/* bottom_up calculation of net for input
pattern i */
void forward(int i)
{
int m,n,p,offset;
float net;
/* input level output calculation */
for (m=0; m < ninattr; m++)
*(outptr[0]+m) = input[i][m];
/* hidden & output layer output calculation */
for (m=1; m < nhlayer + 2; m++) {
for (n=0; n < nunit[m]; n++) {
net = 0.0;
for (p=0; p < nunit[m-1] + 1; p++) {
offset = (nunit[m-1] + 1) * n + p;
net += *(wtptr[m-1] + offset) *
(*(outptr[m-1] + p));
}
*(outptr[m]+n) = 1 / (1 + exp(-net));
}
}
for (n=0; n < nunit[nhlayer + 1]; n++)
outpt[i][n] = *(outptr[nhlayer + 1] + n);
}
/* several conditions are checked to see
   whether learning should terminate */
/* Returns FEXIT when the iteration budget is exhausted, SEXIT when
   either every pattern error is within maxep or the normalized system
   error is within maxe, and CONTNE otherwise.  Also refreshes nsnew,
   the count of consecutive in-tolerance patterns from nfrom. */
int introspective(int nfrom, int nto)
{
    int i;

    /* reached max. iteration */
    if (cnt >= cnt_num)
        return FEXIT;

    /* error for each pattern small enough? */
    nsnew = 0;
    for (i = nfrom; i < nto; i++) {
        if (ep[i] > maxep)
            break;
        nsnew++;
    }
    if (i == nto)
        return SEXIT;

    /* system total error small enough? */
    if (err_curr <= maxe)
        return SEXIT;

    return CONTNE;
}
/* threshold is treated as weight of link from
   a virtual node whose output value is unity */
/*
 * Core generalized-delta-rule training loop over patterns
 * [from_snum, to_snum).  Each iteration runs forward(), back-
 * propagates the error, applies the accumulated weight changes, and
 * asks introspective() whether to stop.  Afterwards it re-runs the
 * forward pass with the final weights and prints the learned weights,
 * thresholds, and per-sample outputs.  Returns SEXIT, FEXIT, or
 * RESTRT as decided by introspective().
 *
 * Fixes vs. original:
 *  - The error file fp3 was opened when fplot10 is set but never
 *    closed (resource leak; last buffered lines could be lost).
 *  - The weights banner printed "layer %d", j+1 where the enclosing
 *    loop's layer index i+1 was meant (the threshold line below it
 *    already used i+1).
 */
int rumelhart(int from_snum, int to_snum)
{
    int i, j, k, m, n, p, offset, index;
    float out;
    char *err_file = "criter.dat";

    nsold = 0;
    cnt = 0;
    result = CONTNE;
    if (fplot10) {
        if ((fp3 = fopen(err_file, "w")) == NULL)
        {
            perror("\nCannot open error file ");
            exit(0);
        }
    }
    do {
        err_curr = 0.0;
        /* for each pattern */
        for (i = from_snum; i < to_snum; i++) {
            forward(i); /* bottom_up calculation */
            /* top_down error propagation */
            /* output_level error: delta = (t - o) * o * (1 - o) */
            for (m = 0; m < nunit[nhlayer + 1]; m++) {
                out = *(outptr[nhlayer + 1] + m);
                *(errptr[nhlayer + 1] + m) = (target[i][m] - out) *
                                             (1 - out) * out;
            }
            /* hidden & input layer errors; each weight change is
               eta * delta * output plus the momentum term
               alpha * (previous change) */
            for (m = nhlayer + 1; m >= 1; m--) {
                for (n = 0; n < nunit[m-1]+1; n++) {
                    *(errptr[m-1] + n) = 0.0;
                    for (p = 0; p < nunit[m]; p++) {
                        offset = (nunit[m-1] + 1) * p + n;
                        *(delw[m-1]+offset) = eta * (*(errptr[m]+p))
                            * (*(outptr[m-1] + n))
                            + alpha * (*(delw[m-1] + offset));
                        *(errptr[m-1]+n) += *(errptr[m] + p)
                            * (*(wtptr[m-1] + offset));
                    }
                    *(errptr[m-1] + n) = *(errptr[m-1] + n) *
                        (1 - *(outptr[m-1] + n))
                        * (*(outptr[m-1] + n));
                }
            }
            /* weight changes */
            for (m = 1; m < nhlayer + 2; m++) {
                for (n = 0; n < nunit[m]; n++) {
                    for (p = 0; p < nunit[m-1] + 1; p++) {
                        offset = (nunit[m-1] + 1) * n + p;
                        *(wtptr[m-1] + offset) += *(delw[m-1] + offset);
                    }
                }
            }
            /* absolute pattern error */
            ep[i] = 0.0;
            for (m = 0; m < nunit[nhlayer + 1]; m++) {
                ep[i] += fabs((target[i][m] -
                               *(outptr[nhlayer+1] + m)));
            }
            err_curr += ep[i] * ep[i];
        } /* normalized system error */
        err_curr = 0.5 * err_curr / ninput;
        /** save errors in file to draw the
            system error with plot10 **/
        if (fplot10)
            fprintf(fp3, "%1d, %2.9f\n", cnt, err_curr);
        cnt++;
        /* check condition for terminating learning */
        result = introspective(from_snum, to_snum);
    } while (result == CONTNE); /* end of long do-while */
    /* the original leaked fp3 here */
    if (fplot10)
        fclose(fp3);
    /* update output with changed weights */
    for (i = from_snum; i < to_snum; i++) forward(i);
    for (i = 0; i < nhlayer + 1; i++) {
        index = 0;
        for (j = 0; j < nunit[i+1]; j++)
        {
            printf("\n\nWeights between unit %d of layer %d",
                   j, i+1);
            printf(" and units of layer %d\n", i);
            for (k = 0; k < nunit[i]; k++)
                printf(" %f", *(wtptr[i] + index++));
            printf("\n Threshold of unit %d of layer %d is %f",
                   j, i+1, *(wtptr[i] + index++));
        }
    }
    for (i = 0; i < ninput; i++)
        for (j = 0; j < noutattr; j++)
            printf("\n\n sample %d output %d = %f target %d = %f",
                   i, j, outpt[i][j], j, target[i][j]);
    printf("\n\nTotal number of iterations is %d", cnt);
    printf("\nNormalized system error is %f\n\n\n", err_curr);
    return(result);
}
/* read in the input data file specified
by user during interactive session */
void user_session()
{
int i,j,showdata;
char fnam[20], dtype[20];
FILE *fp;
printf("\n Start of learning session");
/* for task with name task_name, input
data file of the task is automatically
set to be task_name.dat by program */
printf("\n Enter the task name : ");
scanf("%s", task_name);
printf("\n How many features in input pattern?: ");
scanf("%d", &ninattr);
printf("\n How many output units?: ");
scanf("%d", &noutattr);
printf("\n Total number of input samples?: ");
scanf("%d", &ninput);
strcpy(fnam, task_name);
strcat(fnam, ".dat");
printf("\n Input file name is %s \n", fnam);
if ((fp = fopen(fnam, "r")) == NULL)
{
printf("\nFile %s does not exist", fnam);
exit(0);
}
printf("\n Do you want to look at data just read? (Y/N): " );
scanf("%s", dtype);
showdata = ((dtype[0] == 'y') || (dtype[0] == 'Y'));
for (i=0; i < ninput; i++) {
for (j=0; j < ninattr; j++) {
fscanf(fp, "%f", &input[i][j]);
if (showdata) printf("%f ", input[i][j]);
}
for (j=0; j < noutattr; j++) {
fscanf(fp, "%f", &target[i][j]);
if (showdata) printf("%f\n", target[i][j]);
}
}
if ((i = fclose(fp)) != 0)
{
printf("\nFile %s cannot be closed; error %d ", fnam, i);
exit(0);
}
}
/* main body of learning */
/* Drives a full training run: read the data, collect parameters,
   allocate the net, then retrain from fresh random weights for as
   long as rumelhart() asks for a restart; finally persist the net
   description and learned weights. */
void learning()
{
    int status;

    user_session(); /* read training samples */
    set_up();       /* architecture + learning parameters */
    init();         /* allocate net storage */

    do {
        initwt();   /* fresh random weights each (re)start */
        status = rumelhart(0, ninput);
    } while (status == RESTRT);

    if (status == FEXIT)
    {
        printf("\n Max number of iterations reached, but failed");
        printf("\n to decrease system error sufficiently...\n");
    }

    dwrite(task_name);   /* save architecture and outputs */
    wtwrite(task_name);  /* save weights and thresholds */
}
/* main body of output generation */
void output_generation()
{
int i,m,nsample;
char ans[10];
char dfile[20];
/* If task is already in the memory, data files
for task do not need to be read in. But, if it
is a new task, data files should be read in to
reconstruct the net. */
printf("\nGeneration of outputs for a new pattern");
printf("\n\t Present task name is %s", task_name);
printf("\n\t Work on a different task? (Y or N): ");
scanf("%s", ans);
if ((ans[0]=='y') || (ans[0]=='Y'))
{
printf("\n\t Please enter the task name: ");
scanf("%s", task_name);
dread(task_name);
init();
wtread(task_name);
}
/* input data for output generation
are created */
printf("\nEnter file name for patterns to be processed: ");
scanf("%s", dfile);
if ((fp1=fopen(dfile, "r")) == NULL )
{
perror(" Cannot open dfile ");
exit(0);
}
printf("\nEnter number of patterns for processing: ");
scanf("%d", &nsample);
for (i=0; i < nsample; i++)
for (m=0; m < ninattr; m++)
fscanf(fp1, "%f", &input[i][m]);
/* output generation calculation starts */
for (i=0; i < nsample; i++)
{
forward(i);
for (m=0; m < noutattr; m++)
printf("\n sample %d output %d = %f",
i, m, *(outptr[nhlayer + 1] + m));
printf("\n");
}
printf("\nOutputs have been generated ");
if ((i=fclose(fp1)) != 0)
printf("\nFile %s cannot be closed; error %d", dfile, i);
}
/************************ MAIN *********************************/
/*
 * Top-level menu loop: dispatch to learning() or output_generation()
 * until the user declines to continue.
 *
 * Fixes vs. original:
 *  - void main() replaced with the standard int main(void) returning 0.
 *  - scanf("%s", ...) reads bounded to the destination buffers.
 */
int main(void)
{
    char select[20], cont[10];

    strcpy(task_name, "*********"); /* sentinel: no task loaded yet */
    do {
        printf("\n**Select L(earning) or O(utput generation)**\n");
        do {
            scanf("%19s", select);
            switch (select[0]) {
            case 'o':
            case 'O': output_generation();
                      break;
            case 'l':
            case 'L': learning();
                      break;
            default : printf("\n Please answer");
                      printf(" learning or output generation ");
                      break;
            }
        } while ((select[0] != 'o') && (select[0] != 'O')
              && (select[0] != 'l') && (select[0] != 'L'));
        printf("\nDo you want to continue? ");
        scanf("%9s", cont);
    } while ((cont[0] == 'y') || (cont[0] == 'Y'));
    printf("\n\nIt is all finished. ");
    printf("\nGood bye...\n\n\n ");
    return 0;
}